@InProceedings{PassosJúniorPapa:2017:FiInRe,
               author = "Passos J{\'u}nior, Leandro Aparecido and Papa, Jo{\~a}o Paulo",
          affiliation = "{Federal University of S{\~a}o Carlos} and {S{\~a}o Paulo State 
                         University}",
                title = "Fine-Tuning Infinity Restricted Boltzmann Machines",
            booktitle = "Proceedings...",
                 year = "2017",
               editor = "Torchelsen, Rafael Piccin and Nascimento, Erickson Rangel do and 
                         Panozzo, Daniele and Liu, Zicheng and Farias, Myl{\`e}ne and 
                          Vieira, Thales and Sacht, Leonardo and Ferreira, Nivan and Comba, 
                         Jo{\~a}o Luiz Dihl and Hirata, Nina and Schiavon Porto, Marcelo 
                         and Vital, Creto and Pagot, Christian Azambuja and Petronetto, 
                         Fabiano and Clua, Esteban and Cardeal, Fl{\'a}vio",
         organization = "Conference on Graphics, Patterns and Images, 30. (SIBGRAPI)",
            publisher = "IEEE Computer Society",
              address = "Los Alamitos",
             keywords = "Deep Learning, Infinity Restricted Boltzmann Machines, 
                         Meta-heuristics.",
             abstract = "Restricted Boltzmann Machines (RBMs) have received special 
                         attention in the last decade due to their outstanding results in 
                          a number of applications, such as face and human motion recognition, 
                         and collaborative filtering, among others. However, one of the 
                         main concerns about RBMs is related to the number of hidden units, 
                         which is application-dependent. Infinite RBM (iRBM) was proposed 
                         as an alternative to the regular RBM, where the number of units in 
                          the hidden layer grows as required, removing the need to 
                          select a proper number of hidden units. However, a 
                         less sensitive regularization parameter is introduced as well. 
                         This paper proposes to fine-tune iRBM hyper-parameters by means of 
                         meta-heuristic techniques such as Particle Swarm Optimization, Bat 
                         Algorithm, Cuckoo Search, and the Firefly Algorithm. The proposed 
                         approach is validated in the context of binary image 
                         reconstruction over two well-known datasets. Furthermore, the 
                         experimental results compare the robustness of the iRBM against 
                         the RBM and Ordered RBM (oRBM) using two different learning 
                          algorithms, showing the suitability of using meta-heuristics for 
                         hyper-parameter fine-tuning in RBM-based models.",
  conference-location = "Niter{\'o}i, RJ, Brazil",
      conference-year = "17-20 Oct. 2017",
                  doi = "10.1109/SIBGRAPI.2017.15",
                  url = "http://dx.doi.org/10.1109/SIBGRAPI.2017.15",
             language = "en",
                  ibi = "8JMKD3MGPAW/3PF4NAB",
                  url = "http://urlib.net/ibi/8JMKD3MGPAW/3PF4NAB",
           targetfile = "PID4954803.pdf",
        urlaccessdate = "2024, Apr. 29"
}
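
The abstract above describes tuning RBM-family hyper-parameters with meta-heuristics such as Particle Swarm Optimization (PSO). As a minimal illustrative sketch (not the paper's implementation), the Python snippet below wraps a textbook global-best PSO around a plain Bernoulli RBM trained with one-step Contrastive Divergence, searching over an assumed learning-rate and weight-decay range and scoring candidates by reconstruction error on synthetic binary data. The iRBM's growing hidden layer and its extra regularization parameter are not modeled here; all names, bounds, and data are illustrative.

import numpy as np

rng = np.random.default_rng(0)


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def rbm_reconstruction_error(lr, decay, data, n_hidden=16, epochs=10):
    """Train a small Bernoulli RBM with CD-1 and return its mean squared
    reconstruction error; lr and decay are the hyper-parameters under search."""
    n_visible = data.shape[1]
    W = 0.01 * rng.standard_normal((n_visible, n_hidden))
    b = np.zeros(n_visible)   # visible biases
    c = np.zeros(n_hidden)    # hidden biases
    for _ in range(epochs):
        v0 = data
        ph0 = sigmoid(v0 @ W + c)
        h0 = (rng.random(ph0.shape) < ph0).astype(float)   # sample hidden states
        pv1 = sigmoid(h0 @ W.T + b)                         # one-step reconstruction
        ph1 = sigmoid(pv1 @ W + c)
        # CD-1 update with simple L2 weight decay
        W += lr * ((v0.T @ ph0 - pv1.T @ ph1) / len(v0) - decay * W)
        b += lr * (v0 - pv1).mean(axis=0)
        c += lr * (ph0 - ph1).mean(axis=0)
    recon = sigmoid(sigmoid(data @ W + c) @ W.T + b)        # deterministic pass
    return float(np.mean((data - recon) ** 2))


def pso(objective, bounds, n_particles=8, iters=15, w=0.7, c1=1.5, c2=1.5):
    """Standard global-best PSO over a box-constrained search space."""
    low = np.array([b[0] for b in bounds])
    high = np.array([b[1] for b in bounds])
    pos = low + (high - low) * rng.random((n_particles, len(bounds)))
    vel = np.zeros_like(pos)
    pbest, pbest_val = pos.copy(), np.array([objective(p) for p in pos])
    gbest, gbest_val = pbest[pbest_val.argmin()].copy(), pbest_val.min()
    for _ in range(iters):
        r1, r2 = rng.random((2,) + pos.shape)
        vel = w * vel + c1 * r1 * (pbest - pos) + c2 * r2 * (gbest - pos)
        pos = np.clip(pos + vel, low, high)
        vals = np.array([objective(p) for p in pos])
        improved = vals < pbest_val
        pbest[improved], pbest_val[improved] = pos[improved], vals[improved]
        if vals.min() < gbest_val:
            gbest, gbest_val = pos[vals.argmin()].copy(), vals.min()
    return gbest, gbest_val


# Toy stand-in for binary images: random 4x4 binary patterns.
data = (rng.random((200, 16)) < 0.3).astype(float)

best, err = pso(lambda p: rbm_reconstruction_error(p[0], p[1], data),
                bounds=[(1e-3, 0.5),    # learning rate range (assumed)
                        (0.0, 1e-2)])   # weight decay range (assumed)
print(f"best lr={best[0]:.4f}, weight decay={best[1]:.5f}, reconstruction MSE={err:.4f}")

In the paper's setting, the fitness would instead be evaluated on the binary image reconstruction task over the two datasets mentioned in the abstract, and the same search loop could host the Bat Algorithm, Cuckoo Search, or Firefly Algorithm by swapping the position/velocity update rule.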

